/// Returns the appropriate output directory for the specified package and
/// target.
pub fn out_dir(&self, unit: &Unit<'a>) -> PathBuf {
- if unit.profile.doc {
+ if unit.mode.is_doc() {
self.layout(unit.kind).root().parent().unwrap().join("doc")
} else if unit.target.is_custom_build() {
self.build_script_dir(unit)
/// Returns the appropriate directory layout for either a plugin or not.
pub fn build_script_dir(&self, unit: &Unit<'a>) -> PathBuf {
assert!(unit.target.is_custom_build());
- assert!(!unit.profile.run_custom_build);
+ assert!(!unit.mode.is_run_custom_build());
let dir = self.pkg_dir(unit);
self.layout(Kind::Host).build().join(dir)
}
/// Returns the appropriate directory layout for either a plugin or not.
pub fn build_script_out_dir(&self, unit: &Unit<'a>) -> PathBuf {
assert!(unit.target.is_custom_build());
- assert!(unit.profile.run_custom_build);
+ assert!(unit.mode.is_run_custom_build());
let dir = self.pkg_dir(unit);
self.layout(unit.kind).build().join(dir).join("out")
}
} else {
Some((
out_dir.parent().unwrap().to_owned(),
- if unit.profile.test {
+ if unit.mode.is_any_test() {
file_stem
} else {
bin_stem
let mut ret = Vec::new();
let mut unsupported = Vec::new();
{
- if unit.profile.check {
+ if unit.mode.is_check() {
+ // This is not quite correct for non-lib targets. rustc
+ // currently does not emit rmeta files, so there is nothing to
+ // check for! See #3624.
let path = out_dir.join(format!("lib{}.rmeta", file_stem));
let hardlink = link_stem
.clone()
| TargetKind::Test => {
add("bin", FileFlavor::Normal)?;
}
- TargetKind::Lib(..) | TargetKind::ExampleLib(..) if unit.profile.test => {
+ TargetKind::Lib(..) | TargetKind::ExampleLib(..) if unit.mode.is_any_test() => {
add("bin", FileFlavor::Normal)?;
}
TargetKind::ExampleLib(ref kinds) | TargetKind::Lib(ref kinds) => {
// just here for rustbuild. We need a more principled method
// doing this eventually.
let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA");
- if !(unit.profile.test || unit.profile.check)
+ if !(unit.mode.is_any_test() || unit.mode.is_check())
&& (unit.target.is_dylib() || unit.target.is_cdylib()
|| (unit.target.is_bin() && cx.build_config.target_triple().starts_with("wasm32-")))
&& unit.pkg.package_id().source_id().is_path()
// Throw in the profile we're compiling with. This helps caching
// panic=abort and panic=unwind artifacts, additionally with various
// settings like debuginfo and whatnot.
- unit.profile.hash(&mut hasher);
+ cx.unit_profile(unit).hash(&mut hasher);
+ unit.mode.hash(&mut hasher);
// Artifacts compiled for the host should have a different metadata
// piece than those compiled for the target, so make sure we throw in
#![allow(deprecated)]
-
use std::collections::{HashMap, HashSet};
use std::env;
use std::path::{Path, PathBuf};
use jobserver::Client;
-use core::{Package, PackageId, PackageSet, Profile, Resolve, Target};
-use core::{Dependency, Profiles, Workspace};
+use core::{Package, PackageId, PackageSet, Resolve, Target};
+use core::{Dependency, Workspace};
+use core::profiles::{Profile, ProfileFor, Profiles};
+use ops::CompileMode;
use util::{internal, profile, Cfg, CfgExpr, Config};
use util::errors::{CargoResult, CargoResultExt};
/// example, it needs to know the target architecture (OS, chip arch etc.) and it needs to know
/// whether you want a debug or release build. There is enough information in this struct to figure
/// all that out.
-#[derive(Clone, Copy, Eq, PartialEq, Hash)]
+#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]
pub struct Unit<'a> {
/// Information about available targets, which files to include/exclude, etc. Basically stuff in
/// `Cargo.toml`.
/// to be confused with *target-triple* (or *target architecture* ...), the target arch for a
/// build.
pub target: &'a Target,
- /// The profile contains information about *how* the build should be run, including debug
- /// level, extra args to pass to rustc, etc.
- pub profile: &'a Profile,
+ /// This indicates the purpose of the target for profile selection. See
+ /// `ProfileFor` for more details.
+ pub profile_for: ProfileFor,
/// Whether this compilation unit is for the host or target architecture.
///
/// For example, when
/// the host architecture so the host rustc can use it (when compiling to the target
/// architecture).
pub kind: Kind,
+ /// The "mode" this unit is being compiled for. See `CompileMode` for
+ /// more details.
+ pub mode: CompileMode,
}
/// The build context, containing all information about a build task
pub links: Links<'a>,
pub used_in_plugin: HashSet<Unit<'a>>,
pub jobserver: Client,
+ pub profiles: &'a Profiles,
+ /// This is a workaround to carry the extra compiler args given on the
+ /// command-line for `cargo rustc` and `cargo rustdoc`. These commands
+ /// only support one target, but we don't want the args passed to any
+ /// dependencies.
+ pub extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
target_info: TargetInfo,
host_info: TargetInfo,
- profiles: &'a Profiles,
incremental_env: Option<bool>,
unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
+ unit_profiles: HashMap<Unit<'a>, Profile>,
files: Option<CompilationFiles<'a, 'cfg>>,
}
config: &'cfg Config,
build_config: BuildConfig,
profiles: &'a Profiles,
+ extra_compiler_args: HashMap<Unit<'a>, Vec<String>>,
) -> CargoResult<Context<'a, 'cfg>> {
let incremental_env = match env::var("CARGO_INCREMENTAL") {
Ok(v) => Some(v == "1"),
build_script_overridden: HashSet::new(),
unit_dependencies: HashMap::new(),
+ unit_profiles: HashMap::new(),
files: None,
+ extra_compiler_args,
};
cx.compilation.host_dylib_path = cx.host_info.sysroot_libdir.clone();
let mut queue = JobQueue::new(&self);
self.prepare_units(export_dir, units)?;
self.prepare()?;
- self.build_used_in_plugin_map(&units)?;
- custom_build::build_map(&mut self, &units)?;
+ self.build_used_in_plugin_map(units)?;
+ custom_build::build_map(&mut self, units)?;
for unit in units.iter() {
// Build up a list of pending jobs, each of which represent
None => &output.path,
};
- if unit.profile.test {
+ if unit.mode.is_any_test() && !unit.mode.is_check() {
self.compilation.tests.push((
unit.pkg.clone(),
unit.target.kind().clone(),
continue;
}
- if dep.profile.run_custom_build {
+ if dep.mode.is_run_custom_build() {
let out_dir = self.files().build_script_out_dir(dep).display().to_string();
self.compilation
.extra_env
if !dep.target.is_lib() {
continue;
}
- if dep.profile.doc {
+ if dep.mode.is_doc() {
continue;
}
None => None,
};
- let deps = build_unit_dependencies(units, &self)?;
+ let deps = build_unit_dependencies(units, self)?;
self.unit_dependencies = deps;
+ self.unit_profiles = self.profiles.build_unit_profiles(units, self);
let files = CompilationFiles::new(
units,
host_layout,
target_layout,
export_dir,
self.ws,
- &self,
+ self,
);
self.files = Some(files);
Ok(())
// dependencies. However, that code itself calls this method and
// gets a full pre-filtered set of dependencies. This is not super
// obvious, and clear, but it does work at the moment.
- if unit.profile.run_custom_build {
+ if unit.target.is_custom_build() {
let key = (unit.pkg.package_id().clone(), unit.kind);
if self.build_script_overridden.contains(&key) {
return Vec::new();
self.build_config.jobs
}
- pub fn lib_profile(&self) -> &'a Profile {
- let (normal, test) = if self.build_config.release {
- (&self.profiles.release, &self.profiles.bench_deps)
- } else {
- (&self.profiles.dev, &self.profiles.test_deps)
- };
- if self.build_config.test {
- test
- } else {
- normal
- }
- }
-
- pub fn build_script_profile(&self, _pkg: &PackageId) -> &'a Profile {
- // TODO: should build scripts always be built with the same library
- // profile? How is this controlled at the CLI layer?
- self.lib_profile()
- }
-
- pub fn incremental_args(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+ pub fn incremental_args(
+ &self,
+ unit: &Unit,
+ profile_incremental: bool,
+ ) -> CargoResult<Vec<String>> {
// There's a number of ways to configure incremental compilation right
// now. In order of descending priority (first is highest priority) we
// have:
// have it enabled by default while release profiles have it disabled
// by default.
let global_cfg = self.config.get_bool("build.incremental")?.map(|c| c.val);
- let incremental = match (self.incremental_env, global_cfg, unit.profile.incremental) {
+ let incremental = match (self.incremental_env, global_cfg, profile_incremental) {
(Some(v), _, _) => v,
(None, Some(false), _) => false,
(None, _, other) => other,
Kind::Target => &self.target_info,
}
}
+
+ /// Returns the profile for a given unit.
+ /// This should not be called until profiles are computed in
+ /// `prepare_units`.
+ pub fn unit_profile(&self, unit: &Unit<'a>) -> &Profile {
+ &self.unit_profiles[unit]
+ }
}
/// Acquire extra flags to pass to the compiler from various locations.
//! graph of `Unit`s, which capture these properties.
use super::{Context, Kind, Unit};
-use std::collections::HashMap;
-use CargoResult;
use core::dependency::Kind as DepKind;
+use core::profiles::ProfileFor;
use core::Target;
-use core::Profile;
+use ops::CompileMode;
+use std::collections::HashMap;
+use CargoResult;
pub fn build_unit_dependencies<'a, 'cfg>(
roots: &[Unit<'a>],
) -> CargoResult<HashMap<Unit<'a>, Vec<Unit<'a>>>> {
let mut deps = HashMap::new();
for unit in roots.iter() {
- deps_of(unit, cx, &mut deps)?;
+ deps_of(unit, cx, &mut deps, unit.profile_for)?;
}
Ok(deps)
unit: &Unit<'a>,
cx: &Context<'a, 'cfg>,
deps: &'b mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
+ profile_for: ProfileFor,
) -> CargoResult<&'b [Unit<'a>]> {
if !deps.contains_key(unit) {
- let unit_deps = compute_deps(unit, cx, deps)?;
- deps.insert(*unit, unit_deps.clone());
- for unit in unit_deps {
- deps_of(&unit, cx, deps)?;
+ let unit_deps = compute_deps(unit, cx, deps, profile_for)?;
+ let to_insert: Vec<_> = unit_deps.iter().map(|&(unit, _)| unit).collect();
+ deps.insert(*unit, to_insert);
+ for (unit, profile_for) in unit_deps {
+ deps_of(&unit, cx, deps, profile_for)?;
}
}
Ok(deps[unit].as_ref())
/// For a package, return all targets which are registered as dependencies
/// for that package.
+/// This returns a vec of `(Unit, ProfileFor)` pairs. The `ProfileFor`
+/// is the profile type that should be used for dependencies of the unit.
fn compute_deps<'a, 'b, 'cfg>(
unit: &Unit<'a>,
cx: &Context<'a, 'cfg>,
deps: &'b mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
-) -> CargoResult<Vec<Unit<'a>>> {
- if unit.profile.run_custom_build {
+ profile_for: ProfileFor,
+) -> CargoResult<Vec<(Unit<'a>, ProfileFor)>> {
+ if unit.mode.is_run_custom_build() {
return compute_deps_custom_build(unit, cx, deps);
- } else if unit.profile.doc && !unit.profile.test {
+ } else if unit.mode.is_doc() && !unit.mode.is_any_test() {
+ // Note: This does not include Doctest.
return compute_deps_doc(unit, cx);
}
let id = unit.pkg.package_id();
let deps = cx.resolve.deps(id);
- let mut ret = deps
- .filter(|&(_id, deps)| {
- assert!(deps.len() > 0);
- deps.iter().any(|dep| {
- // If this target is a build command, then we only want build
- // dependencies, otherwise we want everything *other than* build
- // dependencies.
- if unit.target.is_custom_build() != dep.is_build() {
- return false;
- }
+ let mut ret = deps.filter(|&(_id, deps)| {
+ assert!(deps.len() > 0);
+ deps.iter().any(|dep| {
+ // If this target is a build command, then we only want build
+ // dependencies, otherwise we want everything *other than* build
+ // dependencies.
+ if unit.target.is_custom_build() != dep.is_build() {
+ return false;
+ }
- // If this dependency is *not* a transitive dependency, then it
- // only applies to test/example targets
- if !dep.is_transitive() && !unit.target.is_test() && !unit.target.is_example()
- && !unit.profile.test
- {
- return false;
- }
+ // If this dependency is *not* a transitive dependency, then it
+ // only applies to test/example targets
+ if !dep.is_transitive() && !unit.target.is_test() && !unit.target.is_example()
+ && !unit.mode.is_any_test()
+ {
+ return false;
+ }
- // If this dependency is only available for certain platforms,
- // make sure we're only enabling it for that platform.
- if !cx.dep_platform_activated(dep, unit.kind) {
- return false;
- }
+ // If this dependency is only available for certain platforms,
+ // make sure we're only enabling it for that platform.
+ if !cx.dep_platform_activated(dep, unit.kind) {
+ return false;
+ }
- // If the dependency is optional, then we're only activating it
- // if the corresponding feature was activated
- if dep.is_optional() && !cx.resolve.features(id).contains(&*dep.name()) {
- return false;
- }
+ // If the dependency is optional, then we're only activating it
+ // if the corresponding feature was activated
+ if dep.is_optional() && !cx.resolve.features(id).contains(&*dep.name()) {
+ return false;
+ }
- // If we've gotten past all that, then this dependency is
- // actually used!
- true
- })
+ // If we've gotten past all that, then this dependency is
+ // actually used!
+ true
})
- .filter_map(|(id, _)| {
- match cx.get_package(id) {
- Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
- let unit = Unit {
+ }).filter_map(|(id, _)| match cx.get_package(id) {
+ Ok(pkg) => pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
+ let mode = check_or_build_mode(&unit.mode, t);
+ Ok((
+ Unit {
pkg,
target: t,
- profile: lib_or_check_profile(unit, t, cx),
+ profile_for,
kind: unit.kind.for_target(t),
- };
- Ok(unit)
- }),
- Err(e) => Some(Err(e)),
- }
+ mode,
+ },
+ profile_for,
+ ))
+ }),
+ Err(e) => Some(Err(e)),
})
.collect::<CargoResult<Vec<_>>>()?;
if unit.target.is_custom_build() {
return Ok(ret);
}
- ret.extend(dep_build_script(unit, cx));
+ ret.extend(dep_build_script(unit));
// If this target is a binary, test, example, etc, then it depends on
// the library of the same package. The call to `resolve.deps` above
// didn't include `pkg` in the return values, so we need to special case
// it here and see if we need to push `(pkg, pkg_lib_target)`.
- if unit.target.is_lib() && !unit.profile.doc {
+ if unit.target.is_lib() && unit.mode != CompileMode::Doctest {
return Ok(ret);
}
- ret.extend(maybe_lib(unit, cx));
-
- // Integration tests/benchmarks require binaries to be built
- if unit.profile.test && (unit.target.is_test() || unit.target.is_bench()) {
- ret.extend(
- unit.pkg
- .targets()
- .iter()
- .filter(|t| {
- let no_required_features = Vec::new();
+ ret.extend(maybe_lib(unit, profile_for));
- t.is_bin() &&
- // Skip binaries with required features that have not been selected.
- t.required_features().unwrap_or(&no_required_features).iter().all(|f| {
- cx.resolve.features(id).contains(f)
- })
- })
- .map(|t| Unit {
- pkg: unit.pkg,
- target: t,
- profile: lib_or_check_profile(unit, t, cx),
- kind: unit.kind.for_target(t),
- }),
- );
- }
Ok(ret)
}
unit: &Unit<'a>,
cx: &Context<'a, 'cfg>,
deps: &mut HashMap<Unit<'a>, Vec<Unit<'a>>>,
-) -> CargoResult<Vec<Unit<'a>>> {
+) -> CargoResult<Vec<(Unit<'a>, ProfileFor)>> {
// When not overridden, then the dependencies to run a build script are:
//
- // 1. Compiling the build script itcx
+ // 1. Compiling the build script itself
// 2. For each immediate dependency of our package which has a `links`
// key, the execution of that build script.
let not_custom_build = unit.pkg
.find(|t| !t.is_custom_build())
.unwrap();
let tmp = Unit {
+ pkg: unit.pkg,
target: not_custom_build,
- profile: &cx.profiles.dev,
- ..*unit
+ // The profile here isn't critical. We are just using this temp unit
+ // for fetching dependencies that might have `links`.
+ profile_for: ProfileFor::Any,
+ kind: unit.kind,
+ mode: CompileMode::Build,
};
- let deps = deps_of(&tmp, cx, deps)?;
+ let deps = deps_of(&tmp, cx, deps, ProfileFor::CustomBuild)?;
Ok(deps.iter()
.filter_map(|unit| {
if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
return None;
}
- dep_build_script(unit, cx)
+ dep_build_script(unit)
})
- .chain(Some(Unit {
- profile: cx.build_script_profile(unit.pkg.package_id()),
- kind: Kind::Host, // build scripts always compiled for the host
- ..*unit
- }))
+ .chain(Some((
+ Unit {
+ pkg: unit.pkg,
+ target: unit.target,
+ profile_for: ProfileFor::CustomBuild,
+ kind: Kind::Host, // build scripts always compiled for the host
+ mode: CompileMode::Build,
+ },
+ ProfileFor::CustomBuild,
+ )))
.collect())
}
fn compute_deps_doc<'a, 'cfg>(
unit: &Unit<'a>,
cx: &Context<'a, 'cfg>,
-) -> CargoResult<Vec<Unit<'a>>> {
+) -> CargoResult<Vec<(Unit<'a>, ProfileFor)>> {
let deps = cx.resolve
.deps(unit.pkg.package_id())
.filter(|&(_id, deps)| {
- deps.iter().any(|dep| {
- match dep.kind() {
- DepKind::Normal => cx.dep_platform_activated(dep, unit.kind),
- _ => false,
- }
+ deps.iter().any(|dep| match dep.kind() {
+ DepKind::Normal => cx.dep_platform_activated(dep, unit.kind),
+ _ => false,
})
})
.map(|(id, _deps)| cx.get_package(id));
Some(lib) => lib,
None => continue,
};
- ret.push(Unit {
- pkg: dep,
- target: lib,
- profile: lib_or_check_profile(unit, lib, cx),
- kind: unit.kind.for_target(lib),
- });
- if cx.build_config.doc_all {
- ret.push(Unit {
+ // rustdoc only needs rmeta files for regular dependencies.
+ // However, for plugins/proc-macros, deps should be built like normal.
+ let mode = check_or_build_mode(&unit.mode, lib);
+ ret.push((
+ Unit {
pkg: dep,
target: lib,
- profile: &cx.profiles.doc,
+ profile_for: ProfileFor::Any,
kind: unit.kind.for_target(lib),
- });
+ mode,
+ },
+ ProfileFor::Any,
+ ));
+ if let CompileMode::Doc { deps: true } = unit.mode {
+ ret.push((
+ Unit {
+ pkg: dep,
+ target: lib,
+ profile_for: ProfileFor::Any,
+ kind: unit.kind.for_target(lib),
+ mode: unit.mode,
+ },
+ ProfileFor::Any,
+ ));
}
}
// Be sure to build/run the build script for documented libraries as
- ret.extend(dep_build_script(unit, cx));
+ ret.extend(dep_build_script(unit));
// If we document a binary, we need the library available
if unit.target.is_bin() {
- ret.extend(maybe_lib(unit, cx));
+ ret.extend(maybe_lib(unit, ProfileFor::Any));
}
Ok(ret)
}
-fn maybe_lib<'a, 'cfg>(unit: &Unit<'a>, cx: &Context<'a, 'cfg>) -> Option<Unit<'a>> {
- unit.pkg
- .targets()
- .iter()
- .find(|t| t.linkable())
- .map(|t| Unit {
- pkg: unit.pkg,
- target: t,
- profile: lib_or_check_profile(unit, t, cx),
- kind: unit.kind.for_target(t),
- })
+fn maybe_lib<'a>(unit: &Unit<'a>, profile_for: ProfileFor) -> Option<(Unit<'a>, ProfileFor)> {
+ let mode = check_or_build_mode(&unit.mode, unit.target);
+ unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
+ (
+ Unit {
+ pkg: unit.pkg,
+ target: t,
+ profile_for,
+ kind: unit.kind.for_target(t),
+ mode,
+ },
+ profile_for,
+ )
+ })
}
/// If a build script is scheduled to be run for the package specified by
/// script itself doesn't have any dependencies, so even in that case a unit
/// of work is still returned. `None` is only returned if the package has no
/// build script.
-fn dep_build_script<'a, 'cfg>(unit: &Unit<'a>, cx: &Context<'a, 'cfg>) -> Option<Unit<'a>> {
+fn dep_build_script<'a>(unit: &Unit<'a>) -> Option<(Unit<'a>, ProfileFor)> {
unit.pkg
.targets()
.iter()
.find(|t| t.is_custom_build())
- .map(|t| Unit {
- pkg: unit.pkg,
- target: t,
- profile: &cx.profiles.custom_build,
- kind: unit.kind,
+ .map(|t| {
+ (
+ Unit {
+ pkg: unit.pkg,
+ target: t,
+ // The profile for *running* the build script will actually be the
+ // target the build script is running for (so that the environment
+ // variables get set correctly). This is overridden in
+ // `Profiles::build_unit_profiles`, so the exact value here isn't
+ // critical.
+ profile_for: ProfileFor::CustomBuild,
+ kind: unit.kind,
+ mode: CompileMode::RunCustomBuild,
+ },
+ ProfileFor::CustomBuild,
+ )
})
}
-fn lib_or_check_profile<'a, 'cfg>(
- unit: &Unit,
- target: &Target,
- cx: &Context<'a, 'cfg>,
-) -> &'a Profile {
- if !target.is_custom_build() && !target.for_host()
- && (unit.profile.check || (unit.profile.doc && !unit.profile.test))
- {
- return &cx.profiles.check;
+/// Choose the correct mode for dependencies.
+fn check_or_build_mode(mode: &CompileMode, target: &Target) -> CompileMode {
+ match *mode {
+ CompileMode::Check { .. } | CompileMode::Doc { .. } => {
+ if target.for_host() {
+ // Plugin and proc-macro targets should be compiled like
+ // normal.
+ CompileMode::Build
+ } else {
+ // Regular dependencies should not be checked with --test.
+ // Regular dependencies of doc targets should emit rmeta only.
+ CompileMode::Check { test: false }
+ }
+ }
+ _ => CompileMode::Build,
}
- cx.lib_profile()
}
}
fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<(Work, Work)> {
- assert!(unit.profile.run_custom_build);
+ assert!(unit.mode.is_run_custom_build());
let dependencies = cx.dep_targets(unit);
let build_script_unit = dependencies
.iter()
- .find(|d| !d.profile.run_custom_build && d.target.is_custom_build())
+ .find(|d| !d.mode.is_run_custom_build() && d.target.is_custom_build())
.expect("running a script not depending on an actual script");
let script_output = cx.files().build_script_dir(build_script_unit);
let build_output = cx.files().build_script_out_dir(unit);
// environment variables. Note that the profile-related environment
// variables are not set with this the build script's profile but rather the
// package's library profile.
- let profile = cx.lib_profile();
let to_exec = to_exec.into_os_string();
let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
+ let profile = cx.unit_profile(unit).clone();
cmd.env("OUT_DIR", &build_output)
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
},
)
.env("DEBUG", &profile.debuginfo.is_some().to_string())
- .env("OPT_LEVEL", &profile.opt_level)
+ .env("OPT_LEVEL", &profile.opt_level.to_string())
.env(
"PROFILE",
if cx.build_config.release {
dependencies
.iter()
.filter_map(|unit| {
- if unit.profile.run_custom_build {
+ if unit.mode.is_run_custom_build() {
Some((
unit.pkg.manifest().links().unwrap().to_string(),
unit.pkg.package_id().clone(),
let root = cx.files().out_dir(unit);
let mut missing_outputs = false;
- if unit.profile.doc {
+ if unit.mode.is_doc() {
missing_outputs = !root.join(unit.target.crate_name())
.join("index.html")
.exists();
}
}
- let allow_failure = unit.profile.rustc_args.is_some();
+ let allow_failure = cx.extra_compiler_args.get(unit).is_some();
let target_root = cx.files().target_root().to_path_buf();
let write_fingerprint = Work::new(move |_| {
match fingerprint.update_local(&target_root) {
};
let mut deps = deps;
deps.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b));
- let extra_flags = if unit.profile.doc {
+ let extra_flags = if unit.mode.is_doc() {
cx.rustdocflags_args(unit)?
} else {
cx.rustflags_args(unit)?
};
+ let profile_hash = {
+ let profile = cx.unit_profile(unit);
+ util::hash_u64(&(
+ profile,
+ unit.mode,
+ cx.incremental_args(unit, profile.incremental)?,
+ ))
+ };
let fingerprint = Arc::new(Fingerprint {
rustc: util::hash_u64(&cx.build_config.rustc.verbose_version),
target: util::hash_u64(&unit.target),
- profile: util::hash_u64(&(&unit.profile, cx.incremental_args(unit)?)),
+ profile: profile_hash,
// Note that .0 is hashed here, not .1 which is the cwd. That doesn't
// actually affect the output artifact so there's no need to hash it.
path: util::hash_u64(&super::path_args(cx, unit).0),
// responsibility of the source)
fn use_dep_info(unit: &Unit) -> bool {
let path = unit.pkg.summary().source_id().is_path();
- !unit.profile.doc && path
+ !unit.mode.is_doc() && path
}
/// Prepare the necessary work for the fingerprint of a build command.
TargetKind::Bench => "bench",
TargetKind::CustomBuild => "build-script",
};
- let flavor = if unit.profile.test {
+ let flavor = if unit.mode.is_any_test() && !unit.mode.is_check() {
"test-"
- } else if unit.profile.doc {
+ } else if unit.mode.is_doc() {
"doc-"
} else {
""
use crossbeam::{self, Scope};
use jobserver::{Acquired, HelperThread};
-use core::{PackageId, Profile, Target};
+use core::{PackageId, Target};
+use core::profiles::ProfileFor;
+use ops::CompileMode;
use util::{Config, DependencyQueue, Dirty, Fresh, Freshness};
use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
use handle_error;
struct Key<'a> {
pkg: &'a PackageId,
target: &'a Target,
- profile: &'a Profile,
+ profile_for: ProfileFor,
kind: Kind,
+ mode: CompileMode,
}
pub struct JobState<'a> {
Ok(()) => self.finish(key, cx)?,
Err(e) => {
let msg = "The following warnings were emitted during compilation:";
- self.emit_warnings(Some(msg), key, cx)?;
+ self.emit_warnings(Some(msg), &key, cx)?;
if self.active > 0 {
error = Some(format_err!("build failed"));
}
let build_type = if self.is_release { "release" } else { "dev" };
- let profile = cx.lib_profile();
- let mut opt_type = String::from(if profile.opt_level == "0" {
+ // TODO FIXME: We don't know which pkg to display this for!
+ let profile = cx.profiles.base_profile(self.is_release);
+ let mut opt_type = String::from(if profile.opt_level.as_str() == "0" {
"unoptimized"
} else {
"optimized"
Ok(())
}
- fn emit_warnings(&self, msg: Option<&str>, key: Key<'a>, cx: &mut Context) -> CargoResult<()> {
+ fn emit_warnings(&self, msg: Option<&str>, key: &Key<'a>, cx: &mut Context) -> CargoResult<()> {
let output = cx.build_state.outputs.lock().unwrap();
if let Some(output) = output.get(&(key.pkg.clone(), key.kind)) {
if let Some(msg) = msg {
}
fn finish(&mut self, key: Key<'a>, cx: &mut Context) -> CargoResult<()> {
- if key.profile.run_custom_build && cx.show_warnings(key.pkg) {
- self.emit_warnings(None, key, cx)?;
+ if key.mode.is_run_custom_build() && cx.show_warnings(key.pkg) {
+ self.emit_warnings(None, &key, cx)?;
}
let state = self.pending.get_mut(&key).unwrap();
key: &Key<'a>,
fresh: Freshness,
) -> CargoResult<()> {
- if (self.compiled.contains(key.pkg) && !key.profile.doc)
- || (self.documented.contains(key.pkg) && key.profile.doc)
+ if (self.compiled.contains(key.pkg) && !key.mode.is_doc())
+ || (self.documented.contains(key.pkg) && key.mode.is_doc())
{
return Ok(());
}
// Any dirty stage which runs at least one command gets printed as
// being a compiled package
Dirty => {
- if key.profile.doc {
- if !key.profile.test {
+ if key.mode.is_doc() {
+ // Skip Doctest
+ if !key.mode.is_any_test() {
self.documented.insert(key.pkg);
config.shell().status("Documenting", key.pkg)?;
}
} else {
self.compiled.insert(key.pkg);
- if key.profile.check {
+ if key.mode.is_check() {
config.shell().status("Checking", key.pkg)?;
} else {
config.shell().status("Compiling", key.pkg)?;
Key {
pkg: unit.pkg.package_id(),
target: unit.target,
- profile: unit.profile,
+ profile_for: unit.profile_for,
kind: unit.kind,
+ mode: unit.mode,
}
}
let unit = Unit {
pkg: cx.get_package(self.pkg)?,
target: self.target,
- profile: self.profile,
+ profile_for: self.profile_for,
kind: self.kind,
+ mode: self.mode,
};
let targets = cx.dep_targets(&unit);
Ok(targets
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
- "{} => {}/{} => {:?}",
- self.pkg, self.target, self.profile, self.kind
+ "{} => {}/{:?} => {:?}",
+ self.pkg, self.target, self.mode, self.kind
)
}
}
use same_file::is_same_file;
use serde_json;
-use core::{Feature, PackageId, Profile, Target};
-use core::manifest::Lto;
+use core::{Feature, PackageId, Target};
+use core::profiles::{Lto, Profile};
use core::shell::ColorChoice;
+use ops::CompileMode;
use util::{self, machine_message, Config, Freshness, ProcessBuilder, Rustc};
use util::{internal, join_paths, profile};
use util::paths;
pub jobs: u32,
/// Whether we are building for release
pub release: bool,
- /// Whether we are running tests
- pub test: bool,
- /// Whether we are building documentation
- pub doc_all: bool,
/// Whether to print std output in json format (for machine reading)
pub json_messages: bool,
}
host: host_config,
target: target_config,
release: false,
- test: false,
- doc_all: false,
json_messages: false,
})
}
fingerprint::prepare_init(cx, unit)?;
cx.links.validate(cx.resolve, unit)?;
- let (dirty, fresh, freshness) = if unit.profile.run_custom_build {
+ let (dirty, fresh, freshness) = if unit.mode.is_run_custom_build() {
custom_build::prepare(cx, unit)?
- } else if unit.profile.doc && unit.profile.test {
+ } else if unit.mode == CompileMode::Doctest {
// we run these targets later, so this is just a noop for now
(Work::noop(), Work::noop(), Freshness::Fresh)
} else {
let (mut freshness, dirty, fresh) = fingerprint::prepare_target(cx, unit)?;
- let work = if unit.profile.doc {
+ let work = if unit.mode.is_doc() {
rustdoc(cx, unit)?
} else {
rustc(cx, unit, exec)?
// If we are a binary and the package also contains a library, then we
// don't pass the `-l` flags.
let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib());
- let do_rename = unit.target.allows_underscores() && !unit.profile.test;
+ let do_rename = unit.target.allows_underscores() && !unit.mode.is_any_test();
let real_name = unit.target.name().to_string();
let crate_name = unit.target.crate_name();
let export_dir = cx.files().export_dir(unit);
let package_id = unit.pkg.package_id().clone();
let target = unit.target.clone();
- let profile = unit.profile.clone();
+ let profile = cx.unit_profile(unit).clone();
+ let unit_mode = unit.mode;
let features = cx.resolve
.features_sorted(&package_id)
.into_iter()
}
if json_messages {
+ let art_profile = machine_message::ArtifactProfile {
+ opt_level: profile.opt_level.as_str(),
+ debuginfo: profile.debuginfo,
+ debug_assertions: profile.debug_assertions,
+ overflow_checks: profile.overflow_checks,
+ test: unit_mode.is_any_test(),
+ };
+
machine_message::emit(&machine_message::Artifact {
package_id: &package_id,
target: &target,
- profile: &profile,
+ profile: art_profile,
features,
filenames: destinations,
fresh,
rustdoc.arg(format!("--edition={}", &manifest.edition()));
}
- if let Some(ref args) = unit.profile.rustdoc_args {
+ if let Some(args) = cx.extra_compiler_args.get(unit) {
rustdoc.args(args);
}
unit: &Unit<'a>,
crate_types: &[&str],
) -> CargoResult<()> {
+ assert!(!unit.mode.is_run_custom_build());
+
let Profile {
ref opt_level,
ref lto,
codegen_units,
- ref rustc_args,
debuginfo,
debug_assertions,
overflow_checks,
rpath,
- test,
- doc: _doc,
- run_custom_build,
ref panic,
- check,
+ incremental,
..
- } = *unit.profile;
- assert!(!run_custom_build);
+ } = *cx.unit_profile(unit);
+ let test = unit.mode.is_any_test();
cmd.arg("--crate-name").arg(&unit.target.crate_name());
}
}
- if check {
+ if unit.mode.is_check() {
cmd.arg("--emit=dep-info,metadata");
} else {
cmd.arg("--emit=dep-info,link");
cmd.arg("-C").arg("prefer-dynamic");
}
- if opt_level != "0" {
+ if opt_level.as_str() != "0" {
cmd.arg("-C").arg(&format!("opt-level={}", opt_level));
}
cmd.arg("-C").arg(format!("debuginfo={}", debuginfo));
}
- if let Some(ref args) = *rustc_args {
+ if let Some(args) = cx.extra_compiler_args.get(unit) {
cmd.args(args);
}
// -C overflow-checks is implied by the setting of -C debug-assertions,
// so we only need to provide -C overflow-checks if it differs from
// the value of -C debug-assertions we would provide.
- if opt_level != "0" {
+ if opt_level.as_str() != "0" {
if debug_assertions {
cmd.args(&["-C", "debug-assertions=on"]);
if !overflow_checks {
"linker=",
cx.linker(unit.kind).map(|s| s.as_ref()),
);
- cmd.args(&cx.incremental_args(unit)?);
+ cmd.args(&cx.incremental_args(unit, incremental)?);
Ok(())
}
// error in the future, see PR #4797
if !dep_targets
.iter()
- .any(|u| !u.profile.doc && u.target.linkable())
+ .any(|u| !u.mode.is_doc() && u.target.linkable())
{
if let Some(u) = dep_targets
.iter()
- .find(|u| !u.profile.doc && u.target.is_lib())
+ .find(|u| !u.mode.is_doc() && u.target.is_lib())
{
cx.config.shell().warn(format!(
"The package `{}` \
}
for dep in dep_targets {
- if dep.profile.run_custom_build {
+ if dep.mode.is_run_custom_build() {
cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep));
}
- if dep.target.linkable() && !dep.profile.doc {
+ if dep.target.linkable() && !dep.mode.is_doc() {
link_to(cmd, cx, unit, &dep)?;
}
}
// units representing the execution of a build script don't actually
// generate a dep info file, so we just keep on going below
- if !unit.profile.run_custom_build {
+ if !unit.mode.is_run_custom_build() {
// Add dependencies from rustc dep-info output (stored in fingerprint directory)
let dep_info_loc = fingerprint::dep_info_loc(context, unit);
if let Some(paths) = fingerprint::parse_dep_info(unit.pkg, &dep_info_loc)? {
}
} else {
debug!(
- "can't find dep_info for {:?} {:?}",
+ "can't find dep_info for {:?} {}",
unit.pkg.package_id(),
- unit.profile
+ unit.target
);
return Err(internal("dep_info missing"));
}
// Whether a lock file is published with this crate
[unstable] publish_lockfile: bool,
+
+ // Overriding profiles for dependencies.
+ [unstable] profile_overrides: bool,
}
}
use url::Url;
use core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary};
+use core::profiles::Profiles;
use core::{Edition, Feature, Features, WorkspaceConfig};
use core::interning::InternedString;
use util::Config;
}
}
-// Note that most of the fields here are skipped when serializing because we
-// don't want to export them just yet (becomes a public API of Cargo). Others
-// though are definitely needed!
-#[derive(Clone, PartialEq, Eq, Debug, Hash, Serialize)]
-pub struct Profile {
- pub opt_level: String,
- #[serde(skip_serializing)]
- pub lto: Lto,
- #[serde(skip_serializing)]
- pub codegen_units: Option<u32>, // None = use rustc default
- #[serde(skip_serializing)]
- pub rustc_args: Option<Vec<String>>,
- #[serde(skip_serializing)]
- pub rustdoc_args: Option<Vec<String>>,
- pub debuginfo: Option<u32>,
- pub debug_assertions: bool,
- pub overflow_checks: bool,
- #[serde(skip_serializing)]
- pub rpath: bool,
- pub test: bool,
- #[serde(skip_serializing)]
- pub doc: bool,
- #[serde(skip_serializing)]
- pub run_custom_build: bool,
- #[serde(skip_serializing)]
- pub check: bool,
- #[serde(skip_serializing)]
- pub panic: Option<String>,
- #[serde(skip_serializing)]
- pub incremental: bool,
-}
-
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub enum Lto {
- Bool(bool),
- Named(String),
-}
-
-#[derive(Default, Clone, Debug, PartialEq, Eq)]
-pub struct Profiles {
- pub release: Profile,
- pub dev: Profile,
- pub test: Profile,
- pub test_deps: Profile,
- pub bench: Profile,
- pub bench_deps: Profile,
- pub doc: Profile,
- pub custom_build: Profile,
- pub check: Profile,
- pub check_test: Profile,
- pub doctest: Profile,
-}
-
/// Information about a binary, a library, an example, etc. that is part of the
/// package.
#[derive(Clone, Hash, PartialEq, Eq, Debug)]
}
}
}
-
-impl Profile {
- pub fn default_dev() -> Profile {
- Profile {
- debuginfo: Some(2),
- debug_assertions: true,
- overflow_checks: true,
- incremental: true,
- ..Profile::default()
- }
- }
-
- pub fn default_release() -> Profile {
- Profile {
- opt_level: "3".to_string(),
- debuginfo: None,
- ..Profile::default()
- }
- }
-
- pub fn default_test() -> Profile {
- Profile {
- test: true,
- ..Profile::default_dev()
- }
- }
-
- pub fn default_bench() -> Profile {
- Profile {
- test: true,
- ..Profile::default_release()
- }
- }
-
- pub fn default_doc() -> Profile {
- Profile {
- doc: true,
- ..Profile::default_dev()
- }
- }
-
- pub fn default_custom_build() -> Profile {
- Profile {
- run_custom_build: true,
- ..Profile::default_dev()
- }
- }
-
- pub fn default_check() -> Profile {
- Profile {
- check: true,
- ..Profile::default_dev()
- }
- }
-
- pub fn default_check_test() -> Profile {
- Profile {
- check: true,
- test: true,
- ..Profile::default_dev()
- }
- }
-
- pub fn default_doctest() -> Profile {
- Profile {
- doc: true,
- test: true,
- ..Profile::default_dev()
- }
- }
-}
-
-impl Default for Profile {
- fn default() -> Profile {
- Profile {
- opt_level: "0".to_string(),
- lto: Lto::Bool(false),
- codegen_units: None,
- rustc_args: None,
- rustdoc_args: None,
- debuginfo: None,
- debug_assertions: false,
- overflow_checks: false,
- rpath: false,
- test: false,
- doc: false,
- run_custom_build: false,
- check: false,
- panic: None,
- incremental: false,
- }
- }
-}
-
-impl fmt::Display for Profile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- if self.test {
- write!(f, "Profile(test)")
- } else if self.doc {
- write!(f, "Profile(doc)")
- } else if self.run_custom_build {
- write!(f, "Profile(run)")
- } else if self.check {
- write!(f, "Profile(check)")
- } else {
- write!(f, "Profile(build)")
- }
- }
-}
pub use self::dependency::Dependency;
pub use self::features::{CliUnstable, Edition, Feature, Features};
pub use self::manifest::{EitherManifest, VirtualManifest};
-pub use self::manifest::{LibKind, Manifest, Profile, Profiles, Target, TargetKind};
+pub use self::manifest::{LibKind, Manifest, Target, TargetKind};
pub use self::package::{Package, PackageSet};
pub use self::package_id::PackageId;
pub use self::package_id_spec::PackageIdSpec;
pub mod shell;
pub mod registry;
pub mod compiler;
+pub mod profiles;
mod interning;
mod package_id_spec;
mod workspace;
--- /dev/null
+use std::collections::HashMap;
+use std::{cmp, fmt, hash};
+use ops::CompileMode;
+use util::toml::{StringOrBool, TomlProfile, U32OrBool};
+use core::compiler::Context;
+use core::compiler::Unit;
+use core::interning::InternedString;
+
+/// Collection of all user profiles.
+#[derive(Clone, Debug)]
+pub struct Profiles {
+ dev: ProfileMaker,
+ release: ProfileMaker,
+ test: ProfileMaker,
+ bench: ProfileMaker,
+ doc: ProfileMaker,
+}
+
+impl Profiles {
+ pub fn new(
+ dev: Option<TomlProfile>,
+ release: Option<TomlProfile>,
+ test: Option<TomlProfile>,
+ bench: Option<TomlProfile>,
+ doc: Option<TomlProfile>,
+ ) -> Profiles {
+ Profiles {
+ dev: ProfileMaker {
+ default: Profile::default_dev(),
+ toml: dev,
+ },
+ release: ProfileMaker {
+ default: Profile::default_release(),
+ toml: release,
+ },
+ test: ProfileMaker {
+ default: Profile::default_test(),
+ toml: test,
+ },
+ bench: ProfileMaker {
+ default: Profile::default_bench(),
+ toml: bench,
+ },
+ doc: ProfileMaker {
+ default: Profile::default_doc(),
+ toml: doc,
+ },
+ }
+ }
+
+ /// Retrieve the profile for a target.
+ /// `is_member` is whether or not this package is a member of the
+ /// workspace.
+ fn get_profile(
+ &self,
+ pkg_name: &str,
+ is_member: bool,
+ profile_for: ProfileFor,
+ mode: CompileMode,
+ release: bool,
+ ) -> Profile {
+ let maker = match mode {
+ CompileMode::Test => {
+ if release {
+ &self.bench
+ } else {
+ &self.test
+ }
+ }
+ CompileMode::Build
+ | CompileMode::Check { .. }
+ | CompileMode::Doctest
+ | CompileMode::RunCustomBuild => {
+ // Note: RunCustomBuild doesn't normally use this code path.
+ // `build_unit_profiles` normally ensures that it selects the
+ // ancestor's profile. However `cargo clean -p` can hit this
+ // path.
+ // TODO: I think `cargo clean -p xxx` is not cleaning out
+ // the "OUT_DIR" directory. This is not a new bug.
+ if release {
+ &self.release
+ } else {
+ &self.dev
+ }
+ }
+ CompileMode::Bench => &self.bench,
+ CompileMode::Doc { .. } => &self.doc,
+ };
+ let mut profile = maker.profile_for(pkg_name, is_member, profile_for);
+ // `panic` should not be set for tests/benches, or any of their
+ // dependencies.
+ if profile_for == ProfileFor::TestDependency || mode.is_any_test() {
+ profile.panic = None;
+ }
+ profile
+ }
+
+ /// This returns a generic base profile. This is currently used for the
+ /// `[Finished]` line. It is not entirely accurate, since it doesn't
+ /// select for the package that was actually built.
+ pub fn base_profile(&self, release: bool) -> Profile {
+ if release {
+ self.release.profile_for("", true, ProfileFor::Any)
+ } else {
+ self.dev.profile_for("", true, ProfileFor::Any)
+ }
+ }
+
+ /// Build a mapping from Unit -> Profile for all the given units and all
+ /// of their dependencies.
+ pub fn build_unit_profiles<'a, 'cfg>(
+ &self,
+ units: &[Unit<'a>],
+ cx: &Context<'a, 'cfg>,
+ ) -> HashMap<Unit<'a>, Profile> {
+ let mut result = HashMap::new();
+ for unit in units.iter() {
+ self.build_unit_profiles_rec(unit, None, cx, &mut result);
+ }
+ result
+ }
+
+ fn build_unit_profiles_rec<'a, 'cfg>(
+ &self,
+ unit: &Unit<'a>,
+ parent: Option<&Unit<'a>>,
+ cx: &Context<'a, 'cfg>,
+ map: &mut HashMap<Unit<'a>, Profile>,
+ ) {
+ if !map.contains_key(unit) {
+ let for_unit = if unit.mode.is_run_custom_build() {
+ // The profile for *running* a custom build script is the
+ // target the script is running for. This allows
+ // `custom_build::build_work` to set the correct environment
+ // settings.
+ //
+ // In the case of `cargo clean -p`, it creates artificial
+ // units to compute filenames, without a dependency hierarchy,
+ // so we don't have a parent here. That should be OK, it
+ // only affects the environment variables used to *run*
+ // `build.rs`.
+ parent.unwrap_or(unit)
+ } else {
+ unit
+ };
+ let profile = self.get_profile(
+ &for_unit.pkg.name(),
+ cx.ws.is_member(for_unit.pkg),
+ for_unit.profile_for,
+ for_unit.mode,
+ cx.build_config.release,
+ );
+ map.insert(*unit, profile);
+ let deps = cx.dep_targets(unit);
+ for dep in &deps {
+ self.build_unit_profiles_rec(dep, Some(unit), cx, map);
+ }
+ }
+ }
+}
+
+/// An object used for handling the profile override hierarchy.
+///
+/// The precedence of profiles are (first one wins):
+/// - [profile.dev.overrides.name] - A named package.
+/// - [profile.dev.overrides."*"] - This cannot apply to workspace members.
+/// - [profile.dev.build_override] - This can only apply to `build.rs` scripts
+/// and their dependencies.
+/// - [profile.dev]
+/// - Default (hard-coded) values.
+#[derive(Debug, Clone)]
+struct ProfileMaker {
+ default: Profile,
+ toml: Option<TomlProfile>,
+}
+
+impl ProfileMaker {
+ fn profile_for(&self, pkg_name: &str, is_member: bool, profile_for: ProfileFor) -> Profile {
+ let mut profile = self.default.clone();
+ if let Some(ref toml) = self.toml {
+ merge_profile(&mut profile, toml);
+ if profile_for == ProfileFor::CustomBuild {
+ if let Some(ref build_override) = toml.build_override {
+ merge_profile(&mut profile, build_override);
+ }
+ }
+ if let Some(ref overrides) = toml.overrides {
+ if !is_member {
+ if let Some(star) = overrides.get("*") {
+ merge_profile(&mut profile, star);
+ }
+ }
+ if let Some(byname) = overrides.get(pkg_name) {
+ merge_profile(&mut profile, byname);
+ }
+ }
+ }
+ profile
+ }
+}
+
+fn merge_profile(profile: &mut Profile, toml: &TomlProfile) {
+ if let Some(ref opt_level) = toml.opt_level {
+ profile.opt_level = InternedString::new(&opt_level.0);
+ }
+ match toml.lto {
+ Some(StringOrBool::Bool(b)) => profile.lto = Lto::Bool(b),
+ Some(StringOrBool::String(ref n)) => profile.lto = Lto::Named(n.clone()),
+ None => {}
+ }
+ if toml.codegen_units.is_some() {
+ profile.codegen_units = toml.codegen_units;
+ }
+ match toml.debug {
+ Some(U32OrBool::U32(debug)) => profile.debuginfo = Some(debug),
+ Some(U32OrBool::Bool(true)) => profile.debuginfo = Some(2),
+ Some(U32OrBool::Bool(false)) => profile.debuginfo = None,
+ None => {}
+ }
+ if let Some(debug_assertions) = toml.debug_assertions {
+ profile.debug_assertions = debug_assertions;
+ }
+ if let Some(rpath) = toml.rpath {
+ profile.rpath = rpath;
+ }
+ if let Some(ref panic) = toml.panic {
+ profile.panic = Some(InternedString::new(panic));
+ }
+ if let Some(overflow_checks) = toml.overflow_checks {
+ profile.overflow_checks = overflow_checks;
+ }
+ if let Some(incremental) = toml.incremental {
+ profile.incremental = incremental;
+ }
+}
+
+/// Profile settings used to determine which compiler flags to use for a
+/// target.
+#[derive(Debug, Clone, Eq)]
+pub struct Profile {
+ pub name: &'static str,
+ pub opt_level: InternedString,
+ pub lto: Lto,
+ // None = use rustc default
+ pub codegen_units: Option<u32>,
+ pub debuginfo: Option<u32>,
+ pub debug_assertions: bool,
+ pub overflow_checks: bool,
+ pub rpath: bool,
+ pub incremental: bool,
+ pub panic: Option<InternedString>,
+}
+
+impl Default for Profile {
+ fn default() -> Profile {
+ Profile {
+ name: "",
+ opt_level: InternedString::new("0"),
+ lto: Lto::Bool(false),
+ codegen_units: None,
+ debuginfo: None,
+ debug_assertions: false,
+ overflow_checks: false,
+ rpath: false,
+ incremental: false,
+ panic: None,
+ }
+ }
+}
+
+impl fmt::Display for Profile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "Profile({})", self.name)
+ }
+}
+
+impl hash::Hash for Profile {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: hash::Hasher,
+ {
+ self.comparable().hash(state);
+ }
+}
+
+impl cmp::PartialEq for Profile {
+ fn eq(&self, other: &Self) -> bool {
+ self.comparable() == other.comparable()
+ }
+}
+
+impl Profile {
+ fn default_dev() -> Profile {
+ Profile {
+ name: "dev",
+ debuginfo: Some(2),
+ debug_assertions: true,
+ overflow_checks: true,
+ incremental: true,
+ ..Profile::default()
+ }
+ }
+
+ fn default_release() -> Profile {
+ Profile {
+ name: "release",
+ opt_level: InternedString::new("3"),
+ ..Profile::default()
+ }
+ }
+
+ fn default_test() -> Profile {
+ Profile {
+ name: "test",
+ ..Profile::default_dev()
+ }
+ }
+
+ fn default_bench() -> Profile {
+ Profile {
+ name: "bench",
+ ..Profile::default_release()
+ }
+ }
+
+ fn default_doc() -> Profile {
+ Profile {
+ name: "doc",
+ ..Profile::default_dev()
+ }
+ }
+
+ /// Compare all fields except `name`, which doesn't affect compilation.
+ /// This is necessary for `Unit` deduplication for things like "test" and
+ /// "dev" which are essentially the same.
+ fn comparable(
+ &self,
+ ) -> (
+ &InternedString,
+ &Lto,
+ &Option<u32>,
+ &Option<u32>,
+ &bool,
+ &bool,
+ &bool,
+ &bool,
+ &Option<InternedString>,
+ ) {
+ (
+ &self.opt_level,
+ &self.lto,
+ &self.codegen_units,
+ &self.debuginfo,
+ &self.debug_assertions,
+ &self.overflow_checks,
+ &self.rpath,
+ &self.incremental,
+ &self.panic,
+ )
+ }
+}
+
+/// The link-time-optimization setting.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum Lto {
+ /// False = no LTO
+ /// True = "Fat" LTO
+ Bool(bool),
+ /// Named LTO settings like "thin".
+ Named(String),
+}
+
+/// A flag used in `Unit` to indicate the purpose for the target.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
+pub enum ProfileFor {
+ /// A general-purpose target.
+ Any,
+ /// A target for `build.rs` or any of its dependencies. This enables
+ /// `build_override` profiles for these targets.
+ CustomBuild,
+ /// A target that is a dependency of a test or benchmark. Currently this
+ /// enforces that the `panic` setting is not set.
+ TestDependency,
+}
+
+impl ProfileFor {
+ pub fn all_values() -> Vec<ProfileFor> {
+ vec![
+ ProfileFor::Any,
+ ProfileFor::CustomBuild,
+ ProfileFor::TestDependency,
+ ]
+ }
+}
use url::Url;
use core::registry::PackageRegistry;
-use core::{Dependency, PackageIdSpec, Profile, Profiles};
+use core::{Dependency, PackageIdSpec};
use core::{EitherManifest, Package, SourceId, VirtualManifest};
+use core::profiles::Profiles;
use ops;
use sources::PathSource;
use util::errors::{CargoResult, CargoResultExt};
}
}
+ /// Returns true if the package is a member of the workspace.
+ pub fn is_member(&self, pkg: &Package) -> bool {
+ // TODO: Implement this in a better way.
+ // Maybe make it part of Package?
+ self.members().any(|p| p == pkg)
+ }
+
pub fn is_ephemeral(&self) -> bool {
self.is_ephemeral
}
}
if let Some(ref root_manifest) = self.root_manifest {
- let default_profiles = Profiles {
- release: Profile::default_release(),
- dev: Profile::default_dev(),
- test: Profile::default_test(),
- test_deps: Profile::default_dev(),
- bench: Profile::default_bench(),
- bench_deps: Profile::default_release(),
- doc: Profile::default_doc(),
- custom_build: Profile::default_custom_build(),
- check: Profile::default_check(),
- check_test: Profile::default_check_test(),
- doctest: Profile::default_doctest(),
- };
-
for pkg in self.members()
.filter(|p| p.manifest_path() != root_manifest)
{
- if pkg.manifest().profiles() != &default_profiles {
+ if pkg.manifest().original().has_profiles() {
let message = &format!(
"profiles for the non root package will be ignored, \
specify profiles at the workspace root:\n\
use std::fs;
use std::path::Path;
+use std::collections::HashMap;
-use core::{Profiles, Workspace};
+use core::Workspace;
use core::compiler::{BuildConfig, Context, Kind, Unit};
+use core::profiles::ProfileFor;
use util::Config;
use util::errors::{CargoResult, CargoResultExt};
use util::paths;
-use ops;
+use ops::{self, CompileMode};
pub struct CleanOptions<'a> {
pub config: &'a Config,
// Generate all relevant `Unit` targets for this package
for target in pkg.targets() {
for kind in [Kind::Host, Kind::Target].iter() {
- let Profiles {
- ref release,
- ref dev,
- ref test,
- ref bench,
- ref doc,
- ref custom_build,
- ref test_deps,
- ref bench_deps,
- ref check,
- ref check_test,
- ref doctest,
- } = *profiles;
- let profiles = [
- release,
- dev,
- test,
- bench,
- doc,
- custom_build,
- test_deps,
- bench_deps,
- check,
- check_test,
- doctest,
- ];
- for profile in profiles.iter() {
- units.push(Unit {
- pkg,
- target,
- profile,
- kind: *kind,
- });
+ for mode in CompileMode::all_modes() {
+ for profile_for in ProfileFor::all_values() {
+ units.push(Unit {
+ pkg,
+ target,
+ profile_for,
+ kind: *kind,
+ mode,
+ });
+ }
}
}
}
let mut build_config = BuildConfig::new(config, Some(1), &opts.target, None)?;
build_config.release = opts.release;
- let mut cx = Context::new(ws, &resolve, &packages, opts.config, build_config, profiles)?;
+ let mut cx = Context::new(
+ ws,
+ &resolve,
+ &packages,
+ opts.config,
+ build_config,
+ profiles,
+ HashMap::new(),
+ )?;
cx.prepare_units(None, &units)?;
for unit in units.iter() {
rm_rf(&cx.files().fingerprint_dir(unit), config)?;
if unit.target.is_custom_build() {
- if unit.profile.run_custom_build {
+ if unit.mode.is_run_custom_build() {
rm_rf(&cx.files().build_script_out_dir(unit), config)?;
} else {
rm_rf(&cx.files().build_script_dir(unit), config)?;
//! previously compiled dependency
//!
-use std::collections::HashSet;
+use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};
use std::sync::Arc;
-use core::{Package, Source, Target};
-use core::{PackageId, PackageIdSpec, Profile, Profiles, TargetKind, Workspace};
use core::compiler::{BuildConfig, Compilation, Context, DefaultExecutor, Executor};
use core::compiler::{Kind, Unit};
+use core::profiles::ProfileFor;
use core::resolver::{Method, Resolve};
+use core::{Package, Source, Target};
+use core::{PackageId, PackageIdSpec, TargetKind, Workspace};
use ops;
use util::config::Config;
-use util::{profile, CargoResult, CargoResultExt};
+use util::{lev_distance, profile, CargoResult, CargoResultExt};
/// Contains information about how a package should be compiled.
#[derive(Debug)]
}
}
-#[derive(Clone, Copy, PartialEq, Debug)]
+/// The general "mode" of what to do.
+/// This is used for two purposes. The commands themselves pass this in to
+/// `compile_ws` to tell it the general execution strategy. This influences
+/// the default targets selected. The other use is in the `Unit` struct
+/// to indicate what is being done with a specific target.
+#[derive(Clone, Copy, PartialEq, Debug, Eq, Hash)]
pub enum CompileMode {
+ /// A target being built for a test.
Test,
+ /// Building a target with `rustc` (lib or bin).
Build,
+ /// Building a target with `rustc` to emit `rmeta` metadata only. If
+ /// `test` is true, then it is also compiled with `--test` to check it like
+ /// a test.
Check { test: bool },
+ /// A target being built for a benchmark.
Bench,
+ /// A target that will be documented with `rustdoc`.
+ /// If `deps` is true, then it will also document all dependencies.
Doc { deps: bool },
+ /// A target that will be tested with `rustdoc`.
Doctest,
+ /// A marker for Units that represent the execution of a `build.rs`
+ /// script.
+ RunCustomBuild,
+}
+
+impl CompileMode {
+ /// Returns true if the unit is being checked.
+ pub fn is_check(&self) -> bool {
+ match *self {
+ CompileMode::Check { .. } => true,
+ _ => false,
+ }
+ }
+
+ /// Returns true if this is a doc or doctest. Be careful using this.
+ /// Although both run rustdoc, the dependencies for those two modes are
+ /// very different.
+ pub fn is_doc(&self) -> bool {
+ match *self {
+ CompileMode::Doc { .. } | CompileMode::Doctest => true,
+ _ => false,
+ }
+ }
+
+ /// Returns true if this is any type of test (test, benchmark, doctest, or
+ /// check-test).
+ pub fn is_any_test(&self) -> bool {
+ match *self {
+ CompileMode::Test
+ | CompileMode::Bench
+ | CompileMode::Check { test: true }
+ | CompileMode::Doctest => true,
+ _ => false,
+ }
+ }
+
+ /// Returns true if this is the *execution* of a `build.rs` script.
+ pub fn is_run_custom_build(&self) -> bool {
+ *self == CompileMode::RunCustomBuild
+ }
+
+ /// List of all modes (currently used by `cargo clean -p` for computing
+ /// all possible outputs).
+ pub fn all_modes() -> Vec<CompileMode> {
+ vec![
+ CompileMode::Test,
+ CompileMode::Build,
+ CompileMode::Check { test: true },
+ CompileMode::Check { test: false },
+ CompileMode::Bench,
+ CompileMode::Doc { deps: true },
+ CompileMode::Doc { deps: false },
+ CompileMode::Doctest,
+ CompileMode::RunCustomBuild,
+ ]
+ }
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
.into_path_unlocked();
let mut build_config = BuildConfig::new(config, jobs, &target, Some(rustc_info_cache))?;
build_config.release = release;
- build_config.test = mode == CompileMode::Test || mode == CompileMode::Bench;
build_config.json_messages = message_format == MessageFormat::Json;
- if let CompileMode::Doc { deps } = mode {
- build_config.doc_all = deps;
- }
-
- let profiles = ws.profiles();
+ let default_arch_kind = if build_config.requested_target.is_some() {
+ Kind::Target
+ } else {
+ Kind::Host
+ };
let specs = spec.into_package_id_specs(ws)?;
let features = Method::split_features(features);
})
.collect::<CargoResult<Vec<_>>>()?;
- let mut general_targets = Vec::new();
- let mut package_targets = Vec::new();
-
- match (target_rustc_args, target_rustdoc_args) {
- (&Some(..), _) | (_, &Some(..)) if to_builds.len() != 1 => {
- panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags")
- }
- (&Some(ref args), _) => {
- let all_features =
- resolve_all_features(&resolve_with_overrides, to_builds[0].package_id());
- let targets =
- generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?;
- if targets.len() == 1 {
- let (target, profile) = targets[0];
- let mut profile = profile.clone();
- profile.rustc_args = Some(args.to_vec());
- general_targets.push((target, profile));
- } else {
- bail!(
- "extra arguments to `rustc` can only be passed to one \
- target, consider filtering\nthe package by passing \
- e.g. `--lib` or `--bin NAME` to specify a single target"
- )
- }
- }
- (&None, &Some(ref args)) => {
- let all_features =
- resolve_all_features(&resolve_with_overrides, to_builds[0].package_id());
- let targets =
- generate_targets(to_builds[0], profiles, mode, filter, &all_features, release)?;
- if targets.len() == 1 {
- let (target, profile) = targets[0];
- let mut profile = profile.clone();
- profile.rustdoc_args = Some(args.to_vec());
- general_targets.push((target, profile));
- } else {
- bail!(
- "extra arguments to `rustdoc` can only be passed to one \
- target, consider filtering\nthe package by passing e.g. \
- `--lib` or `--bin NAME` to specify a single target"
- )
- }
- }
- (&None, &None) => for &to_build in to_builds.iter() {
- let all_features = resolve_all_features(&resolve_with_overrides, to_build.package_id());
- let targets =
- generate_targets(to_build, profiles, mode, filter, &all_features, release)?;
- package_targets.push((to_build, targets));
- },
+ let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) {
+ (&Some(ref args), _) => (Some(args.clone()), "rustc"),
+ (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"),
+ _ => (None, ""),
};
- for &(target, ref profile) in &general_targets {
- for &to_build in to_builds.iter() {
- package_targets.push((to_build, vec![(target, profile)]));
+ if extra_args.is_some() && to_builds.len() != 1 {
+ panic!(
+ "`{}` should not accept multiple `-p` flags",
+ extra_args_name
+ );
+ }
+
+ let profiles = ws.profiles();
+ let mut extra_compiler_args = HashMap::new();
+
+ let units = generate_targets(
+ &to_builds,
+ filter,
+ default_arch_kind,
+ mode,
+ &resolve_with_overrides,
+ )?;
+
+ if let Some(args) = extra_args {
+ if units.len() != 1 {
+ bail!(
+ "extra arguments to `{}` can only be passed to one \
+ target, consider filtering\nthe package by passing \
+ e.g. `--lib` or `--bin NAME` to specify a single target",
+ extra_args_name
+ );
}
+ extra_compiler_args.insert(units[0], args);
}
+
let mut ret = {
let _p = profile::start("compiling");
let mut cx = Context::new(
config,
build_config,
profiles,
+ extra_compiler_args,
)?;
- let units = package_targets
- .iter()
- .flat_map(|&(pkg, ref targets)| {
- let default_kind = if cx.build_config.requested_target.is_some() {
- Kind::Target
- } else {
- Kind::Host
- };
- targets.iter().map(move |&(target, profile)| Unit {
- pkg,
- target,
- profile,
- kind: if target.for_host() {
- Kind::Host
- } else {
- default_kind
- },
- })
- })
- .collect::<Vec<_>>();
cx.compile(&units, export_dir.clone(), &exec)?
};
ret.to_doc_test = to_builds.into_iter().cloned().collect();
return Ok(ret);
-
- fn resolve_all_features(
- resolve_with_overrides: &Resolve,
- package_id: &PackageId,
- ) -> HashSet<String> {
- let mut features = resolve_with_overrides.features(package_id).clone();
-
- // Include features enabled for use by dependencies so targets can also use them with the
- // required-features field when deciding whether to be built or skipped.
- for (dep, _) in resolve_with_overrides.deps(package_id) {
- for feature in resolve_with_overrides.features(dep) {
- features.insert(dep.name().to_string() + "/" + feature);
- }
- }
-
- features
- }
}
impl FilterRule {
..
} => examples.is_specific() || tests.is_specific() || benches.is_specific(),
},
+ CompileMode::RunCustomBuild => panic!("Invalid mode"),
}
}
}
}
-#[derive(Clone, Copy, Debug)]
-struct BuildProposal<'a> {
- target: &'a Target,
- profile: &'a Profile,
- required: bool,
-}
-
-fn generate_default_targets<'a>(
+/// Generates all the base targets for the packages the user has requested to
+/// compile. Dependencies for these targets are computed later in
+/// `unit_dependencies`.
+fn generate_targets<'a>(
+ packages: &[&'a Package],
+ filter: &CompileFilter,
+ default_arch_kind: Kind,
mode: CompileMode,
- targets: &'a [Target],
- profile: &'a Profile,
- dep: &'a Profile,
- required_features_filterable: bool,
-) -> Vec<BuildProposal<'a>> {
- match mode {
- CompileMode::Bench => targets
- .iter()
- .filter(|t| t.benched())
- .map(|t| BuildProposal {
- target: t,
- profile,
- required: !required_features_filterable,
- })
- .collect::<Vec<_>>(),
- CompileMode::Test => {
- let mut base = targets
- .iter()
- .filter(|t| t.tested())
- .map(|t| BuildProposal {
- target: t,
- profile: if t.is_example() { dep } else { profile },
- required: !required_features_filterable,
- })
- .collect::<Vec<_>>();
-
- // Always compile the library if we're testing everything as
- // it'll be needed for doctests
- if let Some(t) = targets.iter().find(|t| t.is_lib()) {
- if t.doctested() {
- base.push(BuildProposal {
- target: t,
- profile: dep,
- required: !required_features_filterable,
- });
+ resolve: &Resolve,
+) -> CargoResult<Vec<Unit<'a>>> {
+ let mut units = Vec::new();
+
+ // Helper for creating a Unit struct.
+ let new_unit =
+ |pkg: &'a Package, target: &'a Target, mode: CompileMode, profile_for: ProfileFor| {
+ let actual_profile_for = if profile_for != ProfileFor::Any {
+ profile_for
+ } else if mode.is_any_test() {
+ // Force dependencies of this unit to not set `panic`.
+ ProfileFor::TestDependency
+ } else {
+ profile_for
+ };
+ let actual_mode = match mode {
+ CompileMode::Test => {
+ if target.is_example() {
+ // Examples are included as regular binaries to verify
+ // that they compile.
+ CompileMode::Build
+ } else {
+ CompileMode::Test
+ }
}
+ CompileMode::Build => match *target.kind() {
+ TargetKind::Test => CompileMode::Test,
+ TargetKind::Bench => CompileMode::Bench,
+ _ => CompileMode::Build,
+ },
+ _ => mode,
+ };
+ let kind = if target.for_host() {
+ Kind::Host
+ } else {
+ default_arch_kind
+ };
+ Unit {
+ pkg,
+ target,
+ profile_for: actual_profile_for,
+ kind,
+ mode: actual_mode,
}
- base
- }
- CompileMode::Build | CompileMode::Check { .. } => targets
- .iter()
- .filter(|t| t.is_bin() || t.is_lib())
- .map(|t| BuildProposal {
- target: t,
- profile,
- required: !required_features_filterable,
- })
- .collect(),
- CompileMode::Doc { .. } => targets
- .iter()
- .filter(|t| {
- t.documented()
- && (!t.is_bin() || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
- })
- .map(|t| BuildProposal {
- target: t,
- profile,
- required: !required_features_filterable,
- })
- .collect(),
- CompileMode::Doctest => {
- if let Some(t) = targets.iter().find(|t| t.is_lib()) {
- if t.doctested() {
- return vec![
- BuildProposal {
- target: t,
- profile,
- required: !required_features_filterable,
- },
- ];
+ };
+
+ for pkg in packages {
+ let features = resolve_all_features(resolve, pkg.package_id());
+ // Create a list of proposed targets. The `bool` value indicates
+ // whether or not all required features *must* be present. If false,
+ // and the features are not available, then it will be silently
+ // skipped. Generally, targets specified by name (`--bin foo`) are
+ // required, all others can be silently skipped if features are
+ // missing.
+ let mut proposals: Vec<(Unit<'a>, bool)> = Vec::new();
+
+ match *filter {
+ CompileFilter::Default {
+ required_features_filterable,
+ } => {
+ let default_units = generate_default_targets(pkg.targets(), mode)
+ .iter()
+ .map(|t| {
+ (
+ new_unit(pkg, t, mode, ProfileFor::Any),
+ !required_features_filterable,
+ )
+ })
+ .collect::<Vec<_>>();
+ proposals.extend(default_units);
+ if mode == CompileMode::Test {
+ // Include the lib as it will be required for doctests.
+ if let Some(t) = pkg.targets().iter().find(|t| t.is_lib() && t.doctested()) {
+ proposals.push((
+ new_unit(pkg, t, CompileMode::Build, ProfileFor::TestDependency),
+ false,
+ ));
+ }
}
}
+ CompileFilter::Only {
+ all_targets,
+ lib,
+ ref bins,
+ ref examples,
+ ref tests,
+ ref benches,
+ } => {
+ if lib {
+ if let Some(target) = pkg.targets().iter().find(|t| t.is_lib()) {
+ proposals.push((new_unit(pkg, target, mode, ProfileFor::Any), false));
+ } else if !all_targets {
+ bail!("no library targets found")
+ }
+ }
+ // If --tests was specified, add all targets that would be
+ // generated by `cargo test`.
+ let test_filter = match *tests {
+ FilterRule::All => Target::tested,
+ FilterRule::Just(_) => Target::is_test,
+ };
+ let test_mode = match mode {
+ CompileMode::Build => CompileMode::Test,
+ CompileMode::Check { .. } => CompileMode::Check { test: true },
+ _ => mode,
+ };
+ // If --benches was specified, add all targets that would be
+ // generated by `cargo bench`.
+ let bench_filter = match *benches {
+ FilterRule::All => Target::benched,
+ FilterRule::Just(_) => Target::is_bench,
+ };
+ let bench_mode = match mode {
+ CompileMode::Build => CompileMode::Bench,
+ CompileMode::Check { .. } => CompileMode::Check { test: true },
+ _ => mode,
+ };
- Vec::new()
+ proposals.extend(
+ list_rule_targets(pkg, bins, "bin", Target::is_bin)?
+ .into_iter()
+ .map(|(t, required)| (new_unit(pkg, t, mode, ProfileFor::Any), required))
+ .chain(
+ list_rule_targets(pkg, examples, "example", Target::is_example)?
+ .into_iter()
+ .map(|(t, required)| {
+ (new_unit(pkg, t, mode, ProfileFor::Any), required)
+ }),
+ )
+ .chain(
+ list_rule_targets(pkg, tests, "test", test_filter)?
+ .into_iter()
+ .map(|(t, required)| {
+ (new_unit(pkg, t, test_mode, ProfileFor::Any), required)
+ }),
+ )
+ .chain(
+ list_rule_targets(pkg, benches, "bench", bench_filter)?
+ .into_iter()
+ .map(|(t, required)| {
+ (new_unit(pkg, t, bench_mode, ProfileFor::Any), required)
+ }),
+ )
+ .collect::<Vec<_>>(),
+ );
+ }
}
- }
-}
-/// Given a filter rule and some context, propose a list of targets
-fn propose_indicated_targets<'a>(
- pkg: &'a Package,
- rule: &FilterRule,
- desc: &'static str,
- is_expected_kind: fn(&Target) -> bool,
- profile: &'a Profile,
-) -> CargoResult<Vec<BuildProposal<'a>>> {
- match *rule {
- FilterRule::All => {
- let result = pkg.targets()
- .iter()
- .filter(|t| is_expected_kind(t))
- .map(|t| BuildProposal {
- target: t,
- profile,
- required: false,
- });
- Ok(result.collect())
+ // If any integration tests/benches are being tested, make sure that
+ // binaries are built as well.
+ if !mode.is_check() && proposals.iter().any(|&(ref unit, _)| {
+ unit.mode.is_any_test() && (unit.target.is_test() || unit.target.is_bench())
+ }) {
+ proposals.extend(
+ pkg.targets()
+ .iter()
+ .filter(|t| t.is_bin())
+ .map(|t| (new_unit(pkg, t, CompileMode::Build, ProfileFor::Any), false)),
+ );
}
- FilterRule::Just(ref names) => {
- let mut targets = Vec::new();
- for name in names {
- let target = pkg.targets()
+
+ // Only include targets that are libraries or have all required
+ // features available.
+ for (unit, required) in proposals {
+ let unavailable_features = match unit.target.required_features() {
+ Some(rf) => rf.iter().filter(|f| !features.contains(*f)).collect(),
+ None => Vec::new(),
+ };
+ if unit.target.is_lib() || unavailable_features.is_empty() {
+ units.push(unit);
+ } else if required {
+ let required_features = unit.target.required_features().unwrap();
+ let quoted_required_features: Vec<String> = required_features
.iter()
- .find(|t| t.name() == *name && is_expected_kind(t));
- let t = match target {
- Some(t) => t,
- None => {
- let suggestion = pkg.find_closest_target(name, is_expected_kind);
- match suggestion {
- Some(s) => {
- let suggested_name = s.name();
- bail!(
- "no {} target named `{}`\n\nDid you mean `{}`?",
- desc,
- name,
- suggested_name
- )
- }
- None => bail!("no {} target named `{}`", desc, name),
- }
- }
- };
- debug!("found {} `{}`", desc, name);
- targets.push(BuildProposal {
- target: t,
- profile,
- required: true,
- });
+ .map(|s| format!("`{}`", s))
+ .collect();
+ bail!(
+ "target `{}` requires the features: {}\n\
+ Consider enabling them by passing e.g. `--features=\"{}\"`",
+ unit.target.name(),
+ quoted_required_features.join(", "),
+ required_features.join(" ")
+ );
}
- Ok(targets)
+ // else, silently skip target.
}
}
+ Ok(units)
}
-/// Collect the targets that are libraries or have all required features available.
-fn filter_compatible_targets<'a>(
- mut proposals: Vec<BuildProposal<'a>>,
- features: &HashSet<String>,
-) -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
- let mut compatible = Vec::with_capacity(proposals.len());
- for proposal in proposals.drain(..) {
- let unavailable_features = match proposal.target.required_features() {
- Some(rf) => rf.iter().filter(|f| !features.contains(*f)).collect(),
- None => Vec::new(),
- };
- if proposal.target.is_lib() || unavailable_features.is_empty() {
- compatible.push((proposal.target, proposal.profile));
- } else if proposal.required {
- let required_features = proposal.target.required_features().unwrap();
- let quoted_required_features: Vec<String> = required_features
+fn resolve_all_features(
+ resolve_with_overrides: &Resolve,
+ package_id: &PackageId,
+) -> HashSet<String> {
+ let mut features = resolve_with_overrides.features(package_id).clone();
+
+ // Include features enabled for use by dependencies so targets can also use them with the
+ // required-features field when deciding whether to be built or skipped.
+ for (dep, _) in resolve_with_overrides.deps(package_id) {
+ for feature in resolve_with_overrides.features(dep) {
+ features.insert(dep.name().to_string() + "/" + feature);
+ }
+ }
+
+ features
+}
+
+/// Given a list of all targets for a package, filters out only the targets
+/// that are automatically included when the user doesn't specify any targets.
+fn generate_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> {
+ match mode {
+ CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(),
+ CompileMode::Test => targets.iter().filter(|t| t.tested()).collect(),
+ CompileMode::Build | CompileMode::Check { .. } => targets
+ .iter()
+ .filter(|t| t.is_bin() || t.is_lib())
+ .collect(),
+ CompileMode::Doc { .. } => {
+ // `doc` does lib and bins (bin with same name as lib is skipped).
+ targets
.iter()
- .map(|s| format!("`{}`", s))
- .collect();
- bail!(
- "target `{}` requires the features: {}\n\
- Consider enabling them by passing e.g. `--features=\"{}\"`",
- proposal.target.name(),
- quoted_required_features.join(", "),
- required_features.join(" ")
- );
+ .filter(|t| {
+ t.documented()
+ && (!t.is_bin()
+ || !targets.iter().any(|l| l.is_lib() && l.name() == t.name()))
+ })
+ .collect()
}
+ CompileMode::Doctest => {
+ // `test --doc``
+ targets
+ .iter()
+ .find(|t| t.is_lib() && t.doctested())
+ .into_iter()
+ .collect()
+ }
+ CompileMode::RunCustomBuild => panic!("Invalid mode"),
}
- Ok(compatible)
}
-/// Given the configuration for a build, this function will generate all
-/// target/profile combinations needed to be built.
-fn generate_targets<'a>(
+/// Returns a list of targets based on command-line target selection flags.
+/// The return value is a list of `(Target, bool)` pairs. The `bool` value
+/// indicates whether or not all required features *must* be present.
+fn list_rule_targets<'a>(
pkg: &'a Package,
- profiles: &'a Profiles,
- mode: CompileMode,
- filter: &CompileFilter,
- features: &HashSet<String>,
- release: bool,
-) -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
- let build = if release {
- &profiles.release
- } else {
- &profiles.dev
- };
- let test = if release {
- &profiles.bench
- } else {
- &profiles.test
- };
- let profile = match mode {
- CompileMode::Test => test,
- CompileMode::Bench => &profiles.bench,
- CompileMode::Build => build,
- CompileMode::Check { test: false } => &profiles.check,
- CompileMode::Check { test: true } => &profiles.check_test,
- CompileMode::Doc { .. } => &profiles.doc,
- CompileMode::Doctest => &profiles.doctest,
- };
-
- let test_profile = if profile.check {
- &profiles.check_test
- } else if mode == CompileMode::Build {
- test
- } else {
- profile
- };
-
- let bench_profile = if profile.check {
- &profiles.check_test
- } else if mode == CompileMode::Build {
- &profiles.bench
- } else {
- profile
- };
+ rule: &FilterRule,
+ target_desc: &'static str,
+ is_expected_kind: fn(&Target) -> bool,
+) -> CargoResult<Vec<(&'a Target, bool)>> {
+ match *rule {
+ FilterRule::All => Ok(pkg.targets()
+ .iter()
+ .filter(|t| is_expected_kind(t))
+ .map(|t| (t, false))
+ .collect()),
+ FilterRule::Just(ref names) => names
+ .iter()
+ .map(|name| find_target(pkg, name, target_desc, is_expected_kind))
+ .collect(),
+ }
+}
- let targets = match *filter {
- CompileFilter::Default {
- required_features_filterable,
- } => {
- let deps = if release {
- &profiles.bench_deps
- } else {
- &profiles.test_deps
- };
- generate_default_targets(
- mode,
- pkg.targets(),
- profile,
- deps,
- required_features_filterable,
- )
- }
- CompileFilter::Only {
- all_targets,
- lib,
- ref bins,
- ref examples,
- ref tests,
- ref benches,
- } => {
- let mut targets = Vec::new();
-
- if lib {
- if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
- targets.push(BuildProposal {
- target: t,
- profile,
- required: true,
- });
- } else if !all_targets {
- bail!("no library targets found")
- }
+/// Find the target for a specifically named target.
+fn find_target<'a>(
+ pkg: &'a Package,
+ target_name: &str,
+ target_desc: &'static str,
+ is_expected_kind: fn(&Target) -> bool,
+) -> CargoResult<(&'a Target, bool)> {
+ match pkg.targets()
+ .iter()
+ .find(|t| t.name() == target_name && is_expected_kind(t))
+ {
+ // When a target is specified by name, required features *must* be
+ // available.
+ Some(t) => Ok((t, true)),
+ None => {
+ let suggestion = pkg.targets()
+ .iter()
+ .filter(|t| is_expected_kind(t))
+ .map(|t| (lev_distance(target_name, t.name()), t))
+ .filter(|&(d, _)| d < 4)
+ .min_by_key(|t| t.0)
+ .map(|t| t.1);
+ match suggestion {
+ Some(s) => bail!(
+ "no {} target named `{}`\n\nDid you mean `{}`?",
+ target_desc,
+ target_name,
+ s.name()
+ ),
+ None => bail!("no {} target named `{}`", target_desc, target_name),
}
- targets.append(&mut propose_indicated_targets(
- pkg,
- bins,
- "bin",
- Target::is_bin,
- profile,
- )?);
- targets.append(&mut propose_indicated_targets(
- pkg,
- examples,
- "example",
- Target::is_example,
- profile,
- )?);
- // If --tests was specified, add all targets that would be
- // generated by `cargo test`.
- let test_filter = match *tests {
- FilterRule::All => Target::tested,
- FilterRule::Just(_) => Target::is_test,
- };
- targets.append(&mut propose_indicated_targets(
- pkg,
- tests,
- "test",
- test_filter,
- test_profile,
- )?);
- // If --benches was specified, add all targets that would be
- // generated by `cargo bench`.
- let bench_filter = match *benches {
- FilterRule::All => Target::benched,
- FilterRule::Just(_) => Target::is_bench,
- };
- targets.append(&mut propose_indicated_targets(
- pkg,
- benches,
- "bench",
- bench_filter,
- bench_profile,
- )?);
- targets
}
- };
-
- filter_compatible_targets(targets, features)
+ }
}
use serde::ser;
use serde_json::{self, Value};
-use core::{PackageId, Profile, Target};
+use core::{PackageId, Target};
pub trait Message: ser::Serialize {
fn reason(&self) -> &str;
pub struct Artifact<'a> {
pub package_id: &'a PackageId,
pub target: &'a Target,
- pub profile: &'a Profile,
+ pub profile: ArtifactProfile,
pub features: Vec<String>,
pub filenames: Vec<String>,
pub fresh: bool,
}
}
+/// This is different from the regular `Profile` to maintain backwards
+/// compatibility (in particular, `test` is no longer in `Profile`, but we
+/// still want it to be included here).
+#[derive(Serialize)]
+pub struct ArtifactProfile {
+ pub opt_level: &'static str,
+ pub debuginfo: Option<u32>,
+ pub debug_assertions: bool,
+ pub overflow_checks: bool,
+ pub test: bool,
+}
+
#[derive(Serialize)]
pub struct BuildScript<'a> {
pub package_id: &'a PackageId,
use toml;
use url::Url;
-use core::{GitReference, PackageIdSpec, Profiles, SourceId, WorkspaceConfig, WorkspaceRootConfig};
+use core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig};
use core::{Dependency, Manifest, PackageId, Summary, Target};
use core::{Edition, EitherManifest, Feature, Features, VirtualManifest};
use core::dependency::{Kind, Platform};
-use core::manifest::{LibKind, Lto, ManifestMetadata, Profile};
+use core::manifest::{LibKind, ManifestMetadata};
+use core::profiles::Profiles;
use sources::CRATES_IO;
use util::paths;
use util::{self, Config, ToUrl};
release: Option<TomlProfile>,
}
+impl TomlProfiles {
+ fn validate(&self, features: &Features) -> CargoResult<()> {
+ if let Some(ref test) = self.test {
+ test.validate("test", features)?;
+ }
+ if let Some(ref doc) = self.doc {
+ doc.validate("doc", features)?;
+ }
+ if let Some(ref bench) = self.bench {
+ bench.validate("bench", features)?;
+ }
+ if let Some(ref dev) = self.dev {
+ dev.validate("dev", features)?;
+ }
+ if let Some(ref release) = self.release {
+ release.validate("release", features)?;
+ }
+ Ok(())
+ }
+}
+
#[derive(Clone, Debug)]
-pub struct TomlOptLevel(String);
+pub struct TomlOptLevel(pub String);
impl<'de> de::Deserialize<'de> for TomlOptLevel {
fn deserialize<D>(d: D) -> Result<TomlOptLevel, D::Error>
}
#[derive(Deserialize, Serialize, Clone, Debug, Default)]
+#[serde(rename_all = "kebab-case")]
pub struct TomlProfile {
- #[serde(rename = "opt-level")]
- opt_level: Option<TomlOptLevel>,
- lto: Option<StringOrBool>,
- #[serde(rename = "codegen-units")]
- codegen_units: Option<u32>,
- debug: Option<U32OrBool>,
- #[serde(rename = "debug-assertions")]
- debug_assertions: Option<bool>,
- rpath: Option<bool>,
- panic: Option<String>,
- #[serde(rename = "overflow-checks")]
- overflow_checks: Option<bool>,
- incremental: Option<bool>,
+ pub opt_level: Option<TomlOptLevel>,
+ pub lto: Option<StringOrBool>,
+ pub codegen_units: Option<u32>,
+ pub debug: Option<U32OrBool>,
+ pub debug_assertions: Option<bool>,
+ pub rpath: Option<bool>,
+ pub panic: Option<String>,
+ pub overflow_checks: Option<bool>,
+ pub incremental: Option<bool>,
+ pub overrides: Option<BTreeMap<String, TomlProfile>>,
+ #[serde(rename = "build_override")]
+ pub build_override: Option<Box<TomlProfile>>,
+}
+
+impl TomlProfile {
+ fn validate(&self, name: &str, features: &Features) -> CargoResult<()> {
+ if let Some(ref profile) = self.build_override {
+ features.require(Feature::profile_overrides())?;
+ profile.validate_override()?;
+ }
+ if let Some(ref override_map) = self.overrides {
+ features.require(Feature::profile_overrides())?;
+ for profile in override_map.values() {
+ profile.validate_override()?;
+ }
+ }
+
+ match name {
+ "dev" | "release" => {}
+ _ => {
+ if self.overrides.is_some() || self.build_override.is_some() {
+ bail!(
+ "Profile overrides may only be specified for `dev`
+ or `release` profile, not {}.",
+ name
+ );
+ }
+ }
+ }
+ Ok(())
+ }
+
+ fn validate_override(&self) -> CargoResult<()> {
+ if self.overrides.is_some() || self.build_override.is_some() {
+ bail!("Profile overrides cannot be nested.");
+ }
+ if self.panic.is_some() {
+ bail!("`panic` may not be specified in a build override.")
+ }
+ if self.lto.is_some() {
+ bail!("`lto` may not be specified in a build override.")
+ }
+ if self.rpath.is_some() {
+ bail!("`rpath` may not be specified in a build override.")
+ }
+ Ok(())
+ }
}
#[derive(Clone, Debug, Serialize)]
`[workspace]`, only one can be specified"
),
};
+ if let Some(ref profiles) = me.profile {
+ profiles.validate(&features)?;
+ }
let profiles = build_profiles(&me.profile);
let publish = match project.publish {
Some(VecStringOrBool::VecString(ref vecstring)) => {
}
}
}
+
+ pub fn has_profiles(&self) -> bool {
+ self.profile.is_some()
+ }
}
/// Will check a list of build targets, and make sure the target names are unique within a vector.
fn build_profiles(profiles: &Option<TomlProfiles>) -> Profiles {
let profiles = profiles.as_ref();
- let mut profiles = Profiles {
- release: merge(
- Profile::default_release(),
- profiles.and_then(|p| p.release.as_ref()),
- ),
- dev: merge(
- Profile::default_dev(),
- profiles.and_then(|p| p.dev.as_ref()),
- ),
- test: merge(
- Profile::default_test(),
- profiles.and_then(|p| p.test.as_ref()),
- ),
- test_deps: merge(
- Profile::default_dev(),
- profiles.and_then(|p| p.dev.as_ref()),
- ),
- bench: merge(
- Profile::default_bench(),
- profiles.and_then(|p| p.bench.as_ref()),
- ),
- bench_deps: merge(
- Profile::default_release(),
- profiles.and_then(|p| p.release.as_ref()),
- ),
- doc: merge(
- Profile::default_doc(),
- profiles.and_then(|p| p.doc.as_ref()),
- ),
- custom_build: Profile::default_custom_build(),
- check: merge(
- Profile::default_check(),
- profiles.and_then(|p| p.dev.as_ref()),
- ),
- check_test: merge(
- Profile::default_check_test(),
- profiles.and_then(|p| p.dev.as_ref()),
- ),
- doctest: Profile::default_doctest(),
- };
- // The test/bench targets cannot have panic=abort because they'll all get
- // compiled with --test which requires the unwind runtime currently
- profiles.test.panic = None;
- profiles.bench.panic = None;
- profiles.test_deps.panic = None;
- profiles.bench_deps.panic = None;
- return profiles;
-
- fn merge(profile: Profile, toml: Option<&TomlProfile>) -> Profile {
- let &TomlProfile {
- ref opt_level,
- ref lto,
- codegen_units,
- ref debug,
- debug_assertions,
- rpath,
- ref panic,
- ref overflow_checks,
- ref incremental,
- } = match toml {
- Some(toml) => toml,
- None => return profile,
- };
- let debug = match *debug {
- Some(U32OrBool::U32(debug)) => Some(Some(debug)),
- Some(U32OrBool::Bool(true)) => Some(Some(2)),
- Some(U32OrBool::Bool(false)) => Some(None),
- None => None,
- };
- Profile {
- opt_level: opt_level
- .clone()
- .unwrap_or(TomlOptLevel(profile.opt_level))
- .0,
- lto: match *lto {
- Some(StringOrBool::Bool(b)) => Lto::Bool(b),
- Some(StringOrBool::String(ref n)) => Lto::Named(n.clone()),
- None => profile.lto,
- },
- codegen_units,
- rustc_args: None,
- rustdoc_args: None,
- debuginfo: debug.unwrap_or(profile.debuginfo),
- debug_assertions: debug_assertions.unwrap_or(profile.debug_assertions),
- overflow_checks: overflow_checks.unwrap_or(profile.overflow_checks),
- rpath: rpath.unwrap_or(profile.rpath),
- test: profile.test,
- doc: profile.doc,
- run_custom_build: profile.run_custom_build,
- check: profile.check,
- panic: panic.clone().or(profile.panic),
- incremental: incremental.unwrap_or(profile.incremental),
- }
- }
+ Profiles::new(
+ profiles.and_then(|p| p.dev.clone()),
+ profiles.and_then(|p| p.release.clone()),
+ profiles.and_then(|p| p.test.clone()),
+ profiles.and_then(|p| p.bench.clone()),
+ profiles.and_then(|p| p.doc.clone()),
+ )
}
...
rust = "2018"
```
+
+
+### Profile Overrides
+* Tracking Issue: [rust-lang/rust#48683](https://github.com/rust-lang/rust/issues/48683)
+* RFC: [#2282](https://github.com/rust-lang/rfcs/blob/master/text/2282-profile-dependencies.md)
+
+Profiles can be overridden for specific packages and custom build scripts.
+The general format looks like this:
+
+```toml
+cargo-features = ["profile-overrides"]
+
+[package]
+...
+
+[profile.dev]
+opt-level = 0
+debug = true
+
+# the `image` crate will be compiled with -Copt-level=3
+[profile.dev.overrides.image]
+opt-level = 3
+
+# All dependencies (but not this crate itself) will be compiled
+# with -Copt-level=2 . This includes build dependencies.
+[profile.dev.overrides."*"]
+opt-level = 2
+
+# Build scripts and their dependencies will be compiled with -Copt-level=3
+# By default, build scripts use the same rules as the rest of the profile
+[profile.dev.build_override]
+opt-level = 3
+```
+
+Overrides can only be specified for dev and release profiles.
.with_stderr_contains("[..] --crate-name foo src[/]lib.rs [..] --test [..]")
.with_stderr_contains("[..] --crate-name foo src[/]lib.rs --crate-type lib [..]")
.with_stderr_contains("[..] --crate-name foo src[/]main.rs [..] --test [..]")
- .with_stderr_contains("[..] --crate-name foo src[/]main.rs --crate-type bin [..]")
+ // .with_stderr_contains("[..] --crate-name foo src[/]main.rs --crate-type bin [..]")
.with_stderr_contains("[..]unused_unit_lib[..]")
.with_stderr_contains("[..]unused_unit_bin[..]")
.with_stderr_contains("[..]unused_normal_lib[..]")
.with_stderr_contains("[..]unused_unit_ex1[..]")
.with_stderr_does_not_contain("[..]unused_normal_b1[..]")
.with_stderr_does_not_contain("[..]unused_unit_b1[..]"),
+ // with_stderr_does_not_contain --crate-type lib
+ // with_stderr_does_not_contain --crate-type bin
);
p.root().join("target").rm_rf();
assert_that(
execs()
.with_status(0)
.with_stderr_contains("[..]unused_normal_lib[..]")
- .with_stderr_contains("[..]unused_normal_bin[..]")
+ // .with_stderr_contains("[..]unused_normal_bin[..]")
.with_stderr_contains("[..]unused_unit_t1[..]")
.with_stderr_does_not_contain("[..]unused_unit_lib[..]")
.with_stderr_does_not_contain("[..]unused_unit_bin[..]")
use cargotest::support::registry::Package;
use hamcrest::{assert_that, existing_dir, existing_file, is_not};
use cargo::util::ProcessError;
+use glob::glob;
#[test]
fn simple() {
assert_that(&p.root().join("target/doc/foo/index.html"), existing_file());
assert_that(&p.root().join("target/doc/bar/index.html"), existing_file());
+ // Verify that it only emits rmeta for the dependency.
+ assert_eq!(
+ glob(&p.root().join("target/debug/**/*.rlib").to_str().unwrap())
+ .unwrap()
+ .count(),
+ 0
+ );
+ assert_eq!(
+ glob(&p.root().join("target/debug/deps/libbar-*.rmeta").to_str().unwrap())
+ .unwrap()
+ .count(),
+ 1
+ );
+
assert_that(
p.cargo("doc")
.env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"),
use cargotest::is_nightly;
use cargotest::support::{execs, project};
+use cargotest::ChannelChanger;
use hamcrest::assert_that;
#[test]
),
);
}
+
+#[test]
+fn dep_override_gated() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.dev.build_override]
+ opt-level = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(101).with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ feature `profile-overrides` is required
+
+consider adding `cargo-features = [\"profile-overrides\"]` to the manifest
+",
+ ),
+ );
+
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [profile.dev.overrides."*"]
+ opt-level = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("build").masquerade_as_nightly_cargo(),
+ execs().with_status(101).with_stderr(
+ "\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+ feature `profile-overrides` is required
+
+consider adding `cargo-features = [\"profile-overrides\"]` to the manifest
+",
+ ),
+ );
+}
+
+#[test]
+fn dep_override_basic() {
+ let p = project("foo")
+ .file(
+ "Cargo.toml",
+ r#"
+ cargo-features = ["profile-overrides"]
+
+ [package]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies]
+ bar = {path = "bar"}
+
+ [profile.dev.overrides.bar]
+ opt-level = 3
+ "#,
+ )
+ .file("src/lib.rs", "")
+ .file(
+ "bar/Cargo.toml",
+ r#"
+ [package]
+ name = "bar"
+ version = "0.0.1"
+ authors = []
+ "#,
+ )
+ .file("bar/src/lib.rs", "")
+ .build();
+
+ assert_that(
+ p.cargo("build -v").masquerade_as_nightly_cargo(),
+ execs().with_status(0).with_stderr(
+"[COMPILING] bar v0.0.1 ([..])
+[RUNNING] `rustc --crate-name bar [..] -C opt-level=3 [..]`
+[COMPILING] foo v0.0.1 ([..])
+[RUNNING] `rustc --crate-name foo [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
+ )
+ // TODO: does_not_contain does not support patterns!
+ // .with_stderr_does_not_contain("\
+ // `rustc --crate-name bar[..]-C opt-level=3"),
+ );
+
+}
)
.build();
+ // TODO FIXME: This needs to better verify that examples are not built.
assert_that(
prj.cargo("test").arg("--tests"),
execs()
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]mybin-[..][EXE]
-[RUNNING] target[/]debug[/]deps[/]mytest-[..][EXE]
-[RUNNING] target[/]debug[/]examples[/]myexm-[..][EXE]",
+[RUNNING] target[/]debug[/]deps[/]mytest-[..][EXE]",
dir = prj.url()
))
.with_stdout_contains("test test_in_test ... ok"),
)
.build();
+ // TODO FIXME - verify example does NOT get run.
assert_that(
prj.cargo("test").arg("--examples"),
execs().with_status(0).with_stderr(format!(
"\
[COMPILING] foo v0.0.1 ({dir})
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-[RUNNING] target[/]debug[/]examples[/]myexm-[..][EXE]",
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
dir = prj.url()
)),
);